import numpy as np
import tensorflow.compat.v2 as tf
tf.enable_v2_behavior()
import pandas as pd
from tensorflow import keras
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import RobustScaler
from sklearn.preprocessing import MinMaxScaler
from matplotlib import pyplot
import plotly.graph_objects as go
import math
import seaborn as sns
from sklearn.metrics import mean_squared_error
np.random.seed(1)
tf.random.set_seed(1)
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, LSTM, Dropout, RepeatVector, TimeDistributed
from keras import backend
# ---- Model / run configuration ----
MODELFILENAME = 'MODELS/DNN_3h_TFM'  # path prefix for the saved Keras model ('.h5' appended on save)
TIME_STEPS=18 # 3h window: 18 samples per input sequence (presumably 10-min cadence — TODO confirm)
UNITS=22 # width of the first Dense layer
DROPOUT=0.779 # dropout rate applied after the 16-unit hidden layer
ACTIVATION='sigmoid' # activation of the output Dense layer
OPTIMIZER='adamax' # Keras optimizer name passed to model.compile
EPOCHS=85 # upper bound on epochs; EarlyStopping may end training sooner
BATCHSIZE=23 # mini-batch size for model.fit
VALIDATIONSPLIT=0.2 # fraction of the training sequences held out for validation
# Code to read csv file into Colaboratory (kept for reference when running in Colab):
# from google.colab import files
# uploaded = files.upload()
# import io
# df = pd.read_csv(io.BytesIO(uploaded['SentDATA.csv']))
# Dataset is now stored in a Pandas Dataframe
df = pd.read_csv('../../data/dadesTFM.csv')
# NOTE(review): reset_index here materializes the original row number as an extra
# 'index' column (it survives into train/test, hence the 7-column shapes seen in
# the notebook output) — confirm this is intentional.
df.reset_index(inplace=True)
df['Time'] = pd.to_datetime(df['Time'])
df = df.set_index('Time')  # use the timestamp as the DataFrame index
# Particulate-matter columns, renamed to whitespace-free identifiers so the rest
# of the script can index them directly.
columns = ['PM1', 'PM25', 'PM10', 'PM1ATM', 'PM25ATM', 'PM10ATM']
_RENAME = {
    "PM 1": "PM1", "PM 2.5": "PM25", "PM 10": "PM10",
    "PM 1 ATM": "PM1ATM", "PM 2.5 ATM": "PM25ATM", "PM 10 ATM": "PM10ATM",
}
# rename() already returns a copy, and the renamed columns hold the same data as
# the originals — the previous per-column re-assignment from `df` was redundant.
df1 = df.rename(columns=_RENAME)
# Cast the measurement columns to float32 (matches the Keras input dtype).
df1[columns] = df1[columns].astype(np.float32)
# Chronological 80/20 train/test split — no shuffling, since the rows form a time series.
df2 = df1.copy()
train_size = int(len(df2) * 0.8)
test_size = len(df2) - train_size  # kept for reference; equals len(test)
train, test = df2.iloc[0:train_size], df2.iloc[train_size:len(df2)]
# The bare `train.shape, test.shape` expression was a notebook echo (a no-op in a
# plain script); print the shapes explicitly instead.
print(train.shape, test.shape)
# (notebook output) train.shape, test.shape -> ((3117, 7), (780, 7))
# Standardize the training data, one column at a time.
# Work on an explicit copy: `train` is an iloc-slice of df2, and assigning into a
# slice is what raised the SettingWithCopyWarning in the original notebook run.
train = train.copy()
for col in columns:
    # NOTE: a fresh scaler per column; only the last one survives the loop.
    scaler = StandardScaler()
    train[col] = scaler.fit_transform(train[[col]])
# (notebook output) pandas SettingWithCopyWarning raised six times (once per column) by
# `train[col] = scaler.fit_transform(train[[col]])`:
#   "A value is trying to be set on a copy of a slice from a DataFrame.
#    Try using .loc[row_indexer,col_indexer] = value instead"
# See https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
def create_sequences(X, y, time_steps=TIME_STEPS):
    """Slide a window of `time_steps` rows over X.

    Each window of X becomes one training sample; its target is the y value
    immediately after the window. Returns (windows, targets) as numpy arrays.
    """
    windows = []
    targets = []
    last_start = len(X) - time_steps
    for start in range(last_start):
        end = start + time_steps
        windows.append(X.iloc[start:end].values)
        targets.append(y.iloc[end])
    return np.array(windows), np.array(targets)
# Training sequences are built from the PM2.5 column only (columns[1] == 'PM25');
# the single trained model is later evaluated against every column in turn.
X_train, y_train = create_sequences(train[[columns[1]]], train[columns[1]])
#X_test, y_test = create_sequences(test[[columns[1]]], test[columns[1]])
print(f'X_train shape: {X_train.shape}')
print(f'y_train shape: {y_train.shape}')
# (notebook output) X_train shape: (3099, 18, 1); y_train shape: (3099,)
# Add a custom RMSE metric so Keras reports root-mean-squared error during training.
def rmse(y_true, y_pred):
    """Per-sample root-mean-squared error computed with Keras backend ops."""
    squared_error = backend.square(y_pred - y_true)
    return backend.sqrt(backend.mean(squared_error, axis=-1))
# Feed-forward (Dense-only) network applied to each (TIME_STEPS, 1) input window.
model = Sequential()
# Dense layers on a 2-D (TIME_STEPS, features) input act independently on every
# time step, so the network outputs (batch, TIME_STEPS, 1) — one value per step,
# not one value per window (see the summary output below).
model.add(Dense(units=UNITS, input_shape=(X_train.shape[1], X_train.shape[2]), activation='relu'))
model.add(Dense(16, activation='relu'))
model.add(Dropout(rate=DROPOUT))
# NOTE(review): output activation is sigmoid (range 0..1) but the targets are
# standardized (mean 0, unit variance), so negative targets can never be reached
# exactly — confirm this is intended.
model.add(Dense(X_train.shape[2],activation=ACTIVATION))
# NOTE(review): y_train is (batch,) while the model outputs (batch, TIME_STEPS, 1);
# Keras broadcasts the target across time steps when computing the MAE loss —
# confirm this is intended.
model.compile(optimizer=OPTIMIZER, loss='mae',metrics=[rmse,'mse'])
model.summary()
# (notebook output) model.summary():
#   dense (Dense)     (None, 18, 22)   44 params
#   dense_1 (Dense)   (None, 18, 16)  368 params
#   dropout (Dropout) (None, 18, 16)    0 params
#   dense_2 (Dense)   (None, 18, 1)    17 params
#   Total params: 429 (all trainable)
# Train with early stopping on validation loss (patience 5, minimizing).
# shuffle=False preserves chronological order, which matters because
# validation_split takes the *last* 20% of the (ordered) training sequences.
history = model.fit(X_train, y_train, epochs=EPOCHS, batch_size=BATCHSIZE, validation_split=VALIDATIONSPLIT,
                    callbacks=[keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, mode='min')], shuffle=False)
# (notebook output) training log, abridged — EarlyStopping ended training at epoch 51/85.
#   Epoch  1: loss 0.8534, rmse 0.8582, mse 1.0187 | val_loss 1.2465, val_rmse 1.2470, val_mse 1.7565
#   Epoch 12: loss 0.7030, rmse 0.7371, mse 0.7532 | val_loss 0.8975, val_rmse 0.9030, val_mse 0.9917
#   Epoch 25: loss 0.6616, rmse 0.6982, mse 0.6845 | val_loss 0.8746, val_rmse 0.8811, val_mse 0.9530
#   Epoch 36: loss 0.6450, rmse 0.6824, mse 0.6583 | val_loss 0.8716, val_rmse 0.8782, val_mse 0.9486
#   Epoch 51: loss 0.6400, rmse 0.6777, mse 0.6543 | val_loss 0.8715, val_rmse 0.8783, val_mse 0.9485
import matplotlib.pyplot as plt  # NOTE: mid-file import kept from the notebook cell structure
# Training-history curves for the loss and both compiled metrics.
plt.plot(history.history['loss'], label='MAE Training loss')
plt.plot(history.history['val_loss'], label='MAE Validation loss')
plt.plot(history.history['mse'], label='MSE Training loss')
plt.plot(history.history['val_mse'], label='MSE Validation loss')
plt.plot(history.history['rmse'], label='RMSE Training loss')
plt.plot(history.history['val_rmse'], label='RMSE Validation loss')
plt.legend();
# Distribution of the training reconstruction error.
X_train_pred = model.predict(X_train, verbose=0)
# X_train_pred and X_train are (samples, TIME_STEPS, 1); averaging over axis=1
# gives one MAE value per window.
train_mae_loss = np.mean(np.abs(X_train_pred - X_train), axis=1)
plt.hist(train_mae_loss, bins=50)
plt.xlabel('Train MAE loss')
plt.ylabel('Number of Samples');
def evaluate_prediction(predictions, actual, model_name):
    """Print and return (MAE, RMSE, MSE) between `predictions` and `actual`.

    Both inputs are numpy arrays of the same shape; `model_name` labels the
    printed report.
    """
    diff = predictions - actual
    mse = np.mean(np.square(diff))
    rmse = np.sqrt(mse)
    mae = np.mean(np.abs(diff))
    print(model_name + ':')
    print('Mean Absolute Error: {:.4f}'.format(mae))
    print('Root Mean Square Error: {:.4f}'.format(rmse))
    print('Mean Square Error: {:.4f}'.format(mse))
    print('')
    return mae, rmse, mse
# Report the training-set reconstruction error.
# NOTE: use distinct names for the results — the original `mae,rmse,mse = ...`
# rebound `rmse` (the Keras metric function above) to a float, which would break
# any later re-use of the metric (e.g. loading the saved model with custom_objects).
train_mae, train_rmse, train_mse = evaluate_prediction(X_train_pred, X_train,"LSTM")
# (notebook output) LSTM: Mean Absolute Error: 0.5376, Root Mean Square Error: 0.7184, Mean Square Error: 0.5161
# Persist the trained model in HDF5 format ('MODELS/DNN_3h_TFM.h5').
model.save(MODELFILENAME+'.h5')
# Compute the anomaly threshold from the test reconstruction error.
def calculate_threshold(X_test, X_test_pred):
    """Return the 95th-percentile per-sample RMSE between X_test and its prediction.

    Per-sample distances (RMSE over axis 1) are sorted and the value at the
    0.95 cutoff becomes the anomaly threshold: samples whose reconstruction
    error exceeds it are flagged as anomalies.
    (The original docstring said 0.80 but the code has always used 0.95.)
    """
    distance = np.sqrt(np.mean(np.square(X_test_pred - X_test), axis=1))
    # np.sort returns a sorted copy instead of mutating the local in place.
    distance = np.sort(distance)
    # Guard against an index-out-of-range on very small inputs (len <= ~20 is fine,
    # but len == 0 or rounding at the boundary would otherwise raise).
    cut_off = min(int(0.95 * len(distance)), len(distance) - 1)
    return distance[cut_off]
# Evaluate the (PM2.5-trained) model against every pollutant column of the test
# set: scale the column, window it, score it, derive a threshold, and plot the
# detected anomalies.
for col in columns:
    print ("####################### "+col +" ###########################")
    #Standardize the test data
    # NOTE(review): the scaler is fit on the *test* data itself rather than
    # reusing the training scaler — confirm this leakage is intentional.
    scaler = StandardScaler()
    test_cpy = test.copy()  # NOTE(review): test_cpy is never used afterwards
    # NOTE(review): assigns into a slice of df2 — this is what raised the
    # SettingWithCopyWarning in the notebook output; `test` is mutated in place.
    test[col] = scaler.fit_transform(test[[col]])
    # Build windowed sequences for the test data.
    X_test1, y_test1 = create_sequences(test[[col]], test[col])
    print(f'Testing shape: {X_test1.shape}')
    # Evaluate the model (loss + compiled metrics).
    # NOTE(review): `eval` shadows the Python builtin of the same name.
    eval = model.evaluate(X_test1, y_test1)
    print("evaluate: ",eval)
    # Run model predictions.
    X_test1_pred = model.predict(X_test1, verbose=0)
    evaluate_prediction(X_test1_pred, X_test1,"LSTM")
    # Per-window MAE and RMSE reconstruction losses.
    test1_mae_loss = np.mean(np.abs(X_test1_pred - X_test1), axis=1)
    test1_rmse_loss = np.sqrt(np.mean(np.square(X_test1_pred - X_test1),axis=1))
    # reshaping test prediction to 2-D (samples*TIME_STEPS, features)
    X_test1_predReshape = X_test1_pred.reshape((X_test1_pred.shape[0] * X_test1_pred.shape[1]), X_test1_pred.shape[2])
    # reshaping test data the same way
    X_test1Reshape = X_test1.reshape((X_test1.shape[0] * X_test1.shape[1]), X_test1.shape[2])
    threshold_test = calculate_threshold(X_test1Reshape,X_test1_predReshape)
    # Score table: drop the first TIME_STEPS rows (they have no window/prediction).
    test1_score_df = pd.DataFrame(test[TIME_STEPS:])
    test1_score_df['loss'] = test1_rmse_loss.reshape((-1))
    test1_score_df['threshold'] = threshold_test
    test1_score_df['anomaly'] = test1_score_df['loss'] > test1_score_df['threshold']
    test1_score_df[col] = test[TIME_STEPS:][col]
    # Plot test loss vs. the threshold.
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=test1_score_df['loss'], name='Test loss'))
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=test1_score_df['threshold'], name='Threshold'))
    fig.update_layout(showlegend=True, title='Test loss vs. Threshold')
    fig.show()
    # Collect the rows flagged as anomalies.
    anomalies1 = test1_score_df.loc[test1_score_df['anomaly'] == True]
    anomalies1.shape  # NOTE(review): bare expression, a no-op outside a notebook
    print('anomalies: ',anomalies1.shape); print();
    # Plot the (de-standardized) series with the anomalies, to verify that the
    # normalization does not distort the data.
    # NOTE(review): inverse_transform is given a 1-D Series here; newer
    # scikit-learn versions require a 2-D array — confirm against the installed version.
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=scaler.inverse_transform(test1_score_df[col]), name=col))
    fig.add_trace(go.Scatter(x=anomalies1.index, y=scaler.inverse_transform(anomalies1[col]), mode='markers', name='Anomaly'))
    fig.update_layout(showlegend=True, title='Detected anomalies')
    fig.show()
    print ("######################################################")
# (notebook output) per-column evaluation, abridged. Every column raised the same
# SettingWithCopyWarning on `test[col] = scaler.fit_transform(test[[col]])`, and
# the testing shape was (762, 18, 1) in every case.
#   PM1:     evaluate [loss 0.6667, rmse 0.7020, mse 0.8554] | MAE 0.4955, RMSE 0.7473, MSE 0.5585 | anomalies (59, 10)
#   PM25:    evaluate [loss 0.6850, rmse 0.7213, mse 0.8332] | MAE 0.5103, RMSE 0.7353, MSE 0.5407 | anomalies (99, 10)
#   PM10:    evaluate [loss 0.7073, rmse 0.7429, mse 0.8173] | MAE 0.5311, RMSE 0.7178, MSE 0.5153 | anomalies (66, 10)
#   PM1ATM:  evaluate [loss 0.7150, rmse 0.7514, mse 0.8300] | MAE 0.5347, RMSE 0.7152, MSE 0.5115 | anomalies (61, 10)
#   PM25ATM: evaluate [loss 0.7081, rmse 0.7445, mse 0.8335] | MAE 0.5289, RMSE 0.7200, MSE 0.5185 | anomalies (61, 10)
#   PM10ATM: evaluate [loss 0.6962, rmse 0.7323, mse 0.8269] | MAE 0.5187, RMSE 0.7269, MSE 0.5283 | anomalies (61, 10)